Implement logistic regression
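For reference, the standard derivation that the code below implements (this recap is an addition, not part of the original post):

    z = X·w + b
    ŷ = σ(z) = 1 / (1 + e^(−z))
    binary cross-entropy cost: L = −(1/n) · Σ [ y·log(ŷ) + (1 − y)·log(1 − ŷ) ]
    gradients: ∂L/∂w = (1/n) · Xᵀ·(ŷ − y),   ∂L/∂b = (1/n) · Σ (ŷ − y)

These gradients are exactly the dw and db computed inside fit().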

import numpy as np


class LogisticRegression:

    def __init__(self, learning_rate=0.01, n_iters=1000):
        self.learning_rate = learning_rate
        self.n_iters = n_iters
        self.weights = None
        self.bias = None

    def fit(self, X, y):
        # initialize weights and bias to zeros
        n_samples, n_features = X.shape
        self.weights = np.zeros(n_features)
        self.bias = 0.0

        # gradient descent optimization
        for _ in range(self.n_iters):
            # predicted probabilities for the current parameters
            z = np.dot(X, self.weights) + self.bias
            y_pred = self._sigmoid(z)

            # binary cross-entropy cost, for monitoring only (not used in the update);
            # clip probabilities to avoid log(0)
            eps = 1e-15
            y_clipped = np.clip(y_pred, eps, 1 - eps)
            cost = (-1 / n_samples) * np.sum(
                y * np.log(y_clipped) + (1 - y) * np.log(1 - y_clipped)
            )

            # gradients of the cost w.r.t. weights and bias
            dw = (1 / n_samples) * np.dot(X.T, (y_pred - y))
            db = (1 / n_samples) * np.sum(y_pred - y)

            # gradient descent update
            self.weights -= self.learning_rate * dw
            self.bias -= self.learning_rate * db

    def predict(self, X):
        # predicted probabilities
        z = np.dot(X, self.weights) + self.bias
        y_pred = self._sigmoid(z)
        # threshold at 0.5 to get binary class labels
        return (y_pred >= 0.5).astype(int)

    def _sigmoid(self, z):
        return 1 / (1 + np.exp(-z))
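
A minimal usage sketch on synthetic data (the toy dataset and hyperparameters below are illustrative, not part of the original post):

# sanity check on a toy, linearly separable two-class problem
rng = np.random.default_rng(0)
X0 = rng.normal(loc=-2.0, size=(50, 2))   # cluster for class 0
X1 = rng.normal(loc=2.0, size=(50, 2))    # cluster for class 1
X = np.vstack([X0, X1])
y = np.array([0] * 50 + [1] * 50)

model = LogisticRegression(learning_rate=0.1, n_iters=1000)
model.fit(X, y)
accuracy = (model.predict(X) == y).mean()
print(f"train accuracy: {accuracy:.2f}")  # expected to be close to 1.0 on this toy data

Note that predict() thresholds probabilities at 0.5; for imbalanced data this threshold is often tuned rather than fixed.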


#python
#machine_learning


